library(rstan)
Loading required package: StanHeaders
Loading required package: ggplot2
rstan (Version 2.21.1, GitRev: 2e1f913d3ca3)
For execution on a local, multicore CPU with excess RAM we recommend calling
options(mc.cores = parallel::detectCores()).
To avoid recompilation of unchanged Stan programs, we recommend calling
rstan_options(auto_write = TRUE)
library(survival)
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
── Attaching packages ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse 1.3.1 ──
✓ tibble  3.1.4     ✓ dplyr   1.0.7
✓ tidyr   1.1.3     ✓ stringr 1.4.0
✓ readr   2.0.1     ✓ forcats 0.5.1
✓ purrr   0.3.4     
── Conflicts ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse_conflicts() ──
x tidyr::extract() masks rstan::extract()
x dplyr::filter()  masks stats::filter()
x dplyr::lag()     masks stats::lag()
library(tidybayes)
library(scales)

Attaching package: ‘scales’

The following object is masked from ‘package:purrr’:

    discard

The following object is masked from ‘package:readr’:

    col_factor
# data, parameters, model and generated quantities blocks
# Exponential (constant-hazard) survival model with right-censoring:
#   - uncensored times contribute the exponential log-density (exponential_lpdf)
#   - censored times contribute the log complementary CDF (exponential_lccdf),
#     i.e. the probability of surviving beyond the censoring time
# The rate is log-linear in the covariates: lambda = exp(alpha + X * beta).
# Generated quantities draw posterior-predictive event times for the
# uncensored observations.
Stan_exponential_survival_model <- "
data{
  int <lower=1> N_uncensored;
  int <lower=1> N_censored;
  int <lower=0> numCovariates;
  matrix[N_censored, numCovariates] X_censored;
  matrix[N_uncensored, numCovariates] X_uncensored;
  vector <lower=0>[N_censored] times_censored;
  vector <lower=0>[N_uncensored] times_uncensored;
}

parameters{
  vector[numCovariates] beta; //regression coefficients
  real alpha; //intercept
}

model{
  beta ~ normal(0,10); //prior on regression coefficients
  alpha ~ normal(0,10); //prior on intercept
  target += exponential_lpdf(times_uncensored | exp(alpha+X_uncensored * beta)); //log-likelihood part for uncensored times
  target += exponential_lccdf(times_censored | exp(alpha+X_censored * beta)); //log-likelihood for censored times
}

generated quantities{
  vector[N_uncensored] times_uncensored_sampled; //prediction of death
  for(i in 1:N_uncensored) {
    times_uncensored_sampled[i] = exponential_rng(exp(alpha+X_uncensored[i,]* beta));
  }
}
"
# prepare the data
set.seed(42); # fix RNG for reproducibility of any stochastic steps below
require (tidyverse);
data <- read_csv('../data/necessary_fields.csv') # 2066 rows x 7 cols per the run log
Rows: 2066 Columns: 7
── Column specification ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr (1): host_type
dbl (1): duration_months
lgl (5): major_releases, censored, high_rev_freq, multi_repo, high_author_count

ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
N <- nrow (data); # total number of projects
# NOTE(review): car::recode maps TRUE -> 0 and FALSE -> 1, i.e. it INVERTS the
# logical flags into 0/1 numerics; the masks below depend on this inverted coding.
data$high_rev_freq <- car::recode(data$high_rev_freq, "'TRUE' = 0; 'FALSE' = 1")
data$censored <- car::recode(data$censored, "'TRUE' = 0; 'FALSE' = 1")
X <- as.matrix(pull(data, high_rev_freq)); # single-covariate design matrix (N x 1)
is_censored <- pull(data, censored)==0; # TRUE exactly where the original `censored` flag was TRUE
times <- pull(data, duration_months); # observed durations, in months
msk_censored <- is_censored == 1; # logical mask selecting the censored rows
N_censored <- sum(msk_censored);
# put data into a list for Stan, matching the names/shapes in the model's data block
# (as.matrix keeps the single-column design matrices as matrices after row subsetting,
# which would otherwise drop to plain vectors)
Stan_data <- list (N_uncensored = N - N_censored, 
                    N_censored = N_censored,
                    numCovariates = ncol(X), 
                    X_censored = as.matrix(X[msk_censored,]),
                    X_uncensored = as.matrix(X[!msk_censored ,]), 
                    times_censored = times[msk_censored],
                    times_uncensored = times[!msk_censored])
# fit Stan model
# Compiles the model from source and samples with rstan defaults
# (the run log shows 4 chains x 2000 iterations, 1000 warmup each);
# suppressMessages() only hides compiler/sampler chatter, not warnings.
require(rstan)
exp_surv_model_fit <- suppressMessages(stan(model_code = Stan_exponential_survival_model, data = Stan_data))
sh: Data/bayesian: No such file or directory
Warning in system2(CXX, args = ARGS) :sh: clang++ -mmacosx-version-min=10.13: command not found
 error in running command
Warning in file.remove(c(unprocessed, processed)) :
  cannot remove file '/var/folders/q8/7tchbyvd1dj3hkgw5ffkk6ph0000gp/T//RtmplqRJNu/file2f57570bf8c3.stan', reason 'No such file or directory'

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.000328 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 3.28 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 1.83079 seconds (Warm-up)
Chain 1:                1.68864 seconds (Sampling)
Chain 1:                3.51943 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0.00019 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 1.9 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 1.8193 seconds (Warm-up)
Chain 2:                1.8671 seconds (Sampling)
Chain 2:                3.6864 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000243 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 2.43 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 1.71755 seconds (Warm-up)
Chain 3:                1.83289 seconds (Sampling)
Chain 3:                3.55044 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.000199 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 1.99 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 1.7718 seconds (Warm-up)
Chain 4:                1.5937 seconds (Sampling)
Chain 4:                3.3655 seconds (Total)
Chain 4: 
# print model fit
# Report the RNG seed rstan used for this run, for reproducibility.
print(get_seed(exp_surv_model_fit))
[1] 1781592037
# print fit summary
# Posterior summaries pooled across chains (mean, se_mean, sd, quantiles,
# n_eff, Rhat) for the parameters and every generated quantity.
fit_summary <- summary(exp_surv_model_fit)
print(fit_summary$summary)
                                       mean     se_mean           sd          2.5%           25%           50%           75%         97.5%    n_eff      Rhat
beta[1]                          -0.3649827 0.002232828   0.07775502    -0.5159804    -0.4190547    -0.3647768    -0.3122201    -0.2109212 1212.680 1.0015373
alpha                            -4.5545737 0.001898961   0.06552380    -4.6873049    -4.5989760    -4.5528496    -4.5100872    -4.4313723 1190.601 1.0033388
times_uncensored_sampled[1]     132.6874082 2.121700110 134.00227672     2.8319295    36.4495443    89.7169422   186.4951063   487.9319018 3988.929 0.9994060
times_uncensored_sampled[2]     133.5689338 2.071923203 131.26947611     3.5702591    39.9063472    95.1670550   181.4523861   487.5108726 4014.026 1.0002691
times_uncensored_sampled[3]     134.8286784 2.217946247 138.27126474     2.9770698    38.5223488    93.4905547   185.4026691   488.4504457 3886.528 0.9997243
times_uncensored_sampled[4]     136.0198469 2.212850972 137.63204679     2.7234239    38.1322622    90.8373023   187.6302059   498.9298274 3868.431 1.0000950
times_uncensored_sampled[5]     134.4376128 2.176116701 133.97287843     3.7846059    38.7642386    91.7887850   186.9413625   495.6214616 3790.264 1.0021473
times_uncensored_sampled[6]     135.4165352 2.198448004 132.44705770     3.1923859    39.8255634    96.8336629   187.6686040   490.7169799 3629.545 1.0005374
times_uncensored_sampled[7]     139.8119162 2.294297454 141.57790135     4.1169510    40.9278116    96.0606123   192.2967936   526.6133416 3807.952 0.9995263
times_uncensored_sampled[8]     137.6954022 2.040576194 134.78179704     3.5351123    39.2472526    97.5326819   191.4360844   493.7552786 4362.715 1.0000152
times_uncensored_sampled[9]     136.1238977 2.136521840 135.65093734     3.3500869    39.3982507    94.7162320   190.1865676   491.0063090 4031.168 1.0003850
times_uncensored_sampled[10]     94.7821688 1.539521854  95.87968998     2.4918721    26.6021636    65.3213917   130.7728017   356.2753139 3878.658 1.0000222
times_uncensored_sampled[11]    137.1595561 2.299564861 138.00910880     3.5643348    38.1216226    92.5895703   191.3460877   507.0725713 3601.838 0.9991946
times_uncensored_sampled[12]    135.1701665 2.084413547 132.26309832     3.4942368    39.3934383    93.5070097   191.1570572   477.8703389 4026.332 0.9996017
times_uncensored_sampled[13]    137.6016479 2.193185524 137.31510192     3.1744808    40.3921844    97.9480871   186.2470856   519.9309017 3919.998 1.0003844
times_uncensored_sampled[14]    138.3907711 2.170488768 138.12386992     3.7176894    39.4772865    95.6273592   190.4885172   499.6262344 4049.696 1.0000400
times_uncensored_sampled[15]     94.0215749 1.495059339  91.42272356     2.2724948    26.7715300    66.2028400   133.0524473   333.0054722 3739.310 0.9994917
times_uncensored_sampled[16]    136.7803735 2.228600608 138.45309173     3.3170996    39.8902211    92.7244536   191.6840199   513.9880298 3859.587 1.0002426
times_uncensored_sampled[17]    132.8014655 2.125140893 132.69908138     3.6749703    38.9717079    91.0404926   185.4382082   485.8952870 3899.064 0.9997887
times_uncensored_sampled[18]    135.4579499 2.174827360 135.51159184     3.7484822    40.1920075    94.8009090   185.6466213   493.0693466 3882.427 1.0000167
times_uncensored_sampled[19]     97.1965100 1.548170422  96.31068704     2.9926589    28.7559922    69.0085310   133.7994761   363.5654345 3870.004 1.0008745
times_uncensored_sampled[20]     91.6542876 1.405038293  88.77274272     1.8324030    26.5906447    64.3320156   128.3492961   325.2086720 3991.930 1.0001730
times_uncensored_sampled[21]    136.4361217 2.143989338 135.76242592     3.5784434    38.2992114    94.6134407   192.9765517   502.1270810 4009.719 1.0005456
times_uncensored_sampled[22]    137.2823231 2.253657521 135.52312535     4.1268955    41.3109175    95.4053943   191.7666291   504.2493900 3616.188 0.9993997
times_uncensored_sampled[23]    133.9494976 2.113569528 135.17099650     2.7333145    38.9769816    92.3348048   183.7625084   489.0476277 4090.100 0.9997809
times_uncensored_sampled[24]    140.0721131 2.231795706 139.91323468     3.8255814    40.4150603    98.1576089   194.3122398   519.2952336 3930.146 1.0001119
times_uncensored_sampled[25]    135.4089451 2.192686994 134.19440961     3.5525630    40.2017688    95.6346322   188.8038006   494.2907665 3745.550 0.9996147
times_uncensored_sampled[26]     98.0401415 1.520796463  98.90114880     2.2501314    27.0451514    65.0992112   136.9478797   355.7213883 4229.222 0.9995280
times_uncensored_sampled[27]     96.4010648 1.674855830 100.96631074     2.0794912    27.3760581    63.9970614   130.3797832   373.4257306 3634.110 0.9998301
times_uncensored_sampled[28]    135.7643439 2.114407928 134.23160519     3.2244823    41.1650505    96.8530747   185.6671581   478.5990351 4030.250 0.9995103
times_uncensored_sampled[29]     94.5356062 1.536552842  94.29507168     2.5784179    27.2351844    66.2691384   132.2976458   354.4975318 3766.023 1.0002958
times_uncensored_sampled[30]     95.8141589 1.602463360  97.88923630     2.4084404    27.6145962    65.9222659   130.9119950   351.7248928 3731.588 0.9994790
times_uncensored_sampled[31]    138.8237278 2.218599560 140.79109249     3.0371933    40.1767605    96.0277846   193.9579518   522.9393538 4027.101 0.9995683
times_uncensored_sampled[32]    137.4225660 2.333135235 140.94515239     3.9501599    38.9754700    93.3019589   187.6626566   515.0759954 3649.392 1.0001848
times_uncensored_sampled[33]     96.0305942 1.585831659  96.32104425     2.5296587    29.0271234    66.6241290   133.1045366   359.0082642 3689.166 1.0005223
times_uncensored_sampled[34]     96.5429896 1.552022708  97.68540795     2.3535224    26.6657391    67.5228065   133.0536843   363.5748228 3961.533 0.9999344
times_uncensored_sampled[35]     96.0888860 1.537683290  96.10228103     2.3081371    27.3769903    66.3915700   134.1033510   354.4852771 3906.012 0.9996702
times_uncensored_sampled[36]    139.9622282 2.205924753 139.82965732     3.3599433    38.8298554    96.2264720   195.6529209   521.0919368 4018.067 1.0007824
times_uncensored_sampled[37]     96.2366612 1.507375668  95.31584199     2.2723477    27.2264199    66.8482072   133.1978449   351.6069654 3998.409 0.9998617
times_uncensored_sampled[38]    137.1468847 2.178582277 137.83085076     3.8122744    40.9827772    94.5387018   188.0067508   512.4875811 4002.625 1.0007099
times_uncensored_sampled[39]    136.9559731 2.129868776 131.18256265     3.8809618    40.5699390    97.6518571   194.1222264   482.4515947 3793.556 1.0002314
times_uncensored_sampled[40]    134.3941722 2.198200490 134.58333773     2.9759864    38.6177341    92.5137427   184.8347685   506.6518075 3748.418 1.0002629
times_uncensored_sampled[41]    137.2649493 2.136017336 136.10210944     3.5752843    40.6257043    95.7336082   191.5560511   492.4909464 4059.945 0.9999956
times_uncensored_sampled[42]    139.3287098 2.206801880 139.38581867     3.2472102    41.7953361    97.3158537   193.9968848   524.4541341 3989.427 0.9994068
times_uncensored_sampled[43]    137.3903065 2.117193971 138.26868262     3.8106854    39.3034864    95.2655681   188.0904639   511.0993348 4265.072 0.9996977
times_uncensored_sampled[44]    137.8959316 2.141077542 137.33798690     2.8254635    39.1666214    94.8520110   194.8051759   503.2900503 4114.495 1.0003376
times_uncensored_sampled[45]    138.9513600 2.225880746 138.54733603     3.7872933    40.7285125    96.9378019   193.8078153   502.5378217 3874.294 0.9995231
times_uncensored_sampled[46]    136.7308339 2.178832898 138.29285366     3.5472102    39.7428320    94.6886941   187.3119298   486.7683975 4028.577 0.9994574
times_uncensored_sampled[47]     97.4584434 1.536392980  98.06888211     2.0659110    27.2722133    66.6048780   134.8860481   370.4744286 4074.345 0.9997953
times_uncensored_sampled[48]    137.5023873 2.258224851 136.13490360     3.2084824    38.0729219    97.0283203   191.4874359   512.7140825 3634.165 0.9996261
times_uncensored_sampled[49]     96.2697539 1.489124116  94.33937803     2.8083081    27.9572684    67.1286090   134.4584238   345.2488083 4013.509 0.9996927
times_uncensored_sampled[50]    133.6061072 2.165873290 133.73423312     2.9395476    39.4957625    94.0515647   183.5115194   488.1563007 3812.581 0.9997716
times_uncensored_sampled[51]     95.8438199 1.523088604  96.61732877     2.5382947    27.0727459    67.2519448   132.7650623   355.3816679 4024.016 1.0012169
times_uncensored_sampled[52]    140.9192254 2.257014946 142.93106388     3.6001100    41.7362838    96.4259113   194.9556417   516.8226323 4010.369 0.9999976
times_uncensored_sampled[53]    133.8834902 2.141579977 133.90763532     3.4298326    39.4317680    89.9654327   187.5873042   488.6131871 3909.688 1.0004141
times_uncensored_sampled[54]    138.3663070 2.301008963 140.87285108     3.9143894    40.0334752    95.1300446   188.8701944   516.5999622 3748.159 0.9994086
times_uncensored_sampled[55]    133.8124881 2.145927440 134.13012079     3.9903781    39.7243492    91.1799349   187.9093366   493.8997459 3906.812 1.0003107
times_uncensored_sampled[56]    134.1169729 2.134546055 132.40372691     2.7036041    39.6870738    94.5609824   188.2669877   479.6612769 3847.595 0.9994266
times_uncensored_sampled[57]     93.8980869 1.497776663  94.75411436     2.3746733    26.9215121    64.8648469   129.2218706   348.2839060 4002.230 1.0009670
times_uncensored_sampled[58]    138.1174078 2.162763220 136.51879611     3.6991589    39.4868168    95.2712766   192.5371805   497.1312827 3984.437 1.0015540
times_uncensored_sampled[59]    134.4651870 2.111079764 136.03763642     3.3822801    37.9554707    92.5136874   187.4853175   498.7337323 4152.493 0.9998781
times_uncensored_sampled[60]    139.8327213 2.342794157 140.93105520     3.6301434    39.5455319    94.7607735   192.0568379   517.4477698 3618.638 0.9997617
times_uncensored_sampled[61]    136.2742391 2.438046602 139.57288014     3.0803738    37.5263300    94.8177578   190.2470968   495.8188581 3277.314 1.0006995
times_uncensored_sampled[62]    135.1607383 2.128929359 135.26098408     2.7878667    37.7186939    94.6196479   191.1354146   479.4386337 4036.664 0.9995670
times_uncensored_sampled[63]    135.2470615 2.192055843 138.52094513     3.4684822    38.2161006    92.5615273   186.4949739   507.9012568 3993.261 0.9993113
times_uncensored_sampled[64]    135.3424605 2.243042549 135.84608939     2.9191510    39.3841249    92.7574288   186.5482119   494.3895008 3667.915 1.0006108
times_uncensored_sampled[65]    135.1405810 2.111391276 134.68114650     3.5005091    40.0071145    93.7491233   190.2608680   484.5130155 4068.892 0.9991186
times_uncensored_sampled[66]     91.7364705 1.490034607  91.13875367     2.4822086    26.7021565    64.4118084   125.9383157   342.4072528 3741.222 0.9995606
times_uncensored_sampled[67]    139.7910595 2.185952933 139.63083488     3.2654528    39.0764083    99.2906866   194.5613763   510.0090479 4080.196 0.9994884
times_uncensored_sampled[68]    135.0191620 2.256558809 136.09009890     2.9777031    37.6232936    91.1868727   188.5479612   510.5580013 3637.138 0.9999357
times_uncensored_sampled[69]     96.3066726 1.559727877  97.93169069     2.5503495    27.5505327    65.4624417   133.7871626   360.7555705 3942.292 1.0002989
times_uncensored_sampled[70]    139.3171320 2.160383032 137.67069963     3.3680352    39.8252196    95.2233174   197.2352111   518.9269960 4060.893 0.9992903
times_uncensored_sampled[71]    135.6574831 2.151444856 138.16713011     3.3830935    37.3416934    94.2791316   186.3510097   506.2150776 4124.288 0.9997161
times_uncensored_sampled[72]     93.9951957 1.499023790  93.72521941     2.5329162    26.5968717    65.5350747   130.7835448   335.1167144 3909.272 1.0005162
times_uncensored_sampled[73]     96.9970985 1.526729225  97.41149758     2.7163729    27.7512095    65.5314920   134.4233130   355.7020917 4070.956 1.0008935
times_uncensored_sampled[74]    140.3125457 2.331101938 142.94348395     3.8027291    38.4905007    96.0303908   194.2493044   531.5577872 3760.159 0.9995171
times_uncensored_sampled[75]    138.6640864 2.432262353 139.68724747     3.4157998    39.9455458    97.3505102   191.1761148   514.1924127 3298.319 1.0001069
times_uncensored_sampled[76]    137.1412113 2.206281990 137.69786615     3.9273375    41.4939549    94.8178913   188.5469096   521.3583534 3895.223 0.9995407
times_uncensored_sampled[77]    141.0658124 2.204367006 141.04744921     3.6378271    41.0848469    98.0411310   196.8092895   529.5515640 4094.140 0.9996702
times_uncensored_sampled[78]    137.1473772 2.103980734 134.50532092     3.1822686    40.0753288    97.2504269   189.8149312   502.4076220 4086.913 0.9999990
times_uncensored_sampled[79]    134.1241423 2.164493345 134.18322539     3.2759498    38.7837483    92.2257152   187.1611319   498.3950846 3843.120 1.0002356
times_uncensored_sampled[80]    136.3176563 2.294870199 139.54523135     4.5506186    38.6971292    91.4142683   186.9817591   526.2453750 3697.547 1.0002236
times_uncensored_sampled[81]     94.8908804 1.524912560  94.29482960     2.5801349    26.1272132    65.1896881   133.3584653   338.4507867 3823.718 0.9998350
times_uncensored_sampled[82]     95.7510508 1.542113974  95.70360799     2.2393064    27.1836222    66.9440756   133.0073902   355.2384425 3851.445 0.9997376
times_uncensored_sampled[83]     92.5524427 1.563423189  96.37416412     2.1329944    25.8566389    62.4966307   129.4590233   366.7484340 3799.865 1.0000561
times_uncensored_sampled[84]     95.5561583 1.508490258  95.74040677     2.3633874    27.4078948    66.9862879   134.5906970   355.8743675 4028.149 1.0004378
times_uncensored_sampled[85]     94.9599036 1.514618283  98.25724584     2.7180414    25.7951090    64.9186137   130.9362143   368.8392397 4208.456 0.9995143
times_uncensored_sampled[86]    139.5095668 2.333776912 141.46558773     3.5264812    40.0612705    95.2568817   192.0983541   519.3689488 3674.370 1.0001556
times_uncensored_sampled[87]    132.6709022 2.292633411 133.42729498     3.3576725    36.8865469    93.0989771   184.3824289   495.0357334 3387.038 0.9998777
times_uncensored_sampled[88]     94.2938280 1.435716829  95.37421979     2.2327655    27.3102945    63.9783467   128.1350643   347.5332280 4412.903 1.0013069
times_uncensored_sampled[89]    135.7667868 2.166569727 136.11472275     3.7644703    39.2543046    94.8479456   188.3003221   491.5146699 3946.979 0.9997486
times_uncensored_sampled[90]     93.7145469 1.498004575  92.99583822     2.4359146    26.8761892    65.3872135   130.9443147   334.2104668 3853.903 1.0000057
times_uncensored_sampled[91]    138.1580236 2.375932351 135.74635543     4.0676492    41.2572274    97.8246092   192.4081017   504.7281456 3264.286 1.0010398
times_uncensored_sampled[92]     94.5134600 1.537972584  97.17953050     2.1069141    26.3810416    63.3921128   132.6665275   353.4782425 3992.569 1.0008520
times_uncensored_sampled[93]    136.3432353 2.259713366 139.27222642     4.0845205    39.4949419    92.3869815   186.0876962   512.6528484 3798.589 0.9997829
times_uncensored_sampled[94]     96.0213041 1.442528587  94.15896587     2.3620150    27.5832052    66.3557851   137.0656443   354.9370064 4260.637 0.9994801
times_uncensored_sampled[95]     96.5478941 1.586614369 101.53591227     2.3155222    26.2600870    65.8676583   129.8295324   374.2373212 4095.402 1.0000611
times_uncensored_sampled[96]    135.4466053 2.147097961 133.28368551     2.9801564    39.7669876    94.6333657   185.6312768   491.9182554 3853.455 1.0005457
times_uncensored_sampled[97]     98.4556001 1.566499538  99.20537159     3.0011606    28.0701688    69.6747813   134.7435288   367.2437428 4010.604 1.0000670
times_uncensored_sampled[98]    138.5079341 2.387405719 139.70336914     3.6575344    41.4444577    95.0014039   188.0542930   509.7029948 3424.217 1.0004408
 [ reached getOption("max.print") -- omitted 678 rows ]
# One row per posterior draw, parameters as columns, with tidybayes'
# .chain/.iteration/.draw bookkeeping columns added.
exp_surv_model_draws <- tidybayes::tidy_draws(exp_surv_model_fit)
exp_surv_model_draws
## Constructor for Strata-specific survival function.
## Given one posterior draw (alpha, beta) and a covariate value x, returns
## S(t) = exp(-lambda * t) with constant rate lambda = exp(alpha + x * beta).
construct_survival_function <- function(alpha, beta, x) {
    # The rate is fixed at construction time, so compute it once here
    # rather than on every evaluation of the returned function.
    rate <- exp(alpha + x * beta)
    function(t) exp(-rate * t)
}

## Random functions: one survival-curve realization per posterior draw,
## for each stratum of the binary covariate (x = 1 and x = 0).
exp_surv_model_surv_func <-
    exp_surv_model_draws %>%
    select(.chain, .iteration, .draw, alpha, `beta[1]`) %>%
    ## Simplify name
    rename(beta = `beta[1]`) %>%
    ## Build the realized survival functions row-wise over the draws
    mutate(`S(t|1)` = map2(alpha, beta, function(a, b) construct_survival_function(a, b, 1)),
           `S(t|0)` = map2(alpha, beta, function(a, b) construct_survival_function(a, b, 0)))
exp_surv_model_surv_func
# Evaluation grid: 0 to 165 months in 0.1-month steps (1651 points)
times <- seq(from = 0, to = 165, by = 0.1)
# `data_frame()` was deprecated in tibble 1.1.0 (the original call emitted a
# lifecycle warning); `tibble()` is the drop-in replacement.
times_df <- tibble(t = times)
Warning: `data_frame()` was deprecated in tibble 1.1.0.
Please use `tibble()` instead.
This warning is displayed once every 8 hours.
Call `lifecycle::last_lifecycle_warnings()` to see where this warning was generated.
## Try first realizations
## Evaluate draw 1's survival curve for the x = 1 stratum at the first 10 grid points
exp_surv_model_surv_func$`S(t|1)`[[1]](times[1:10])
 [1] 1.0000000 0.9992512 0.9985030 0.9977554 0.9970083 0.9962618 0.9955158 0.9947704 0.9940256 0.9932813
## Same draw's survival curve for the x = 0 stratum
exp_surv_model_surv_func$`S(t|0)`[[1]](times[1:10])
 [1] 1.0000000 0.9989270 0.9978552 0.9967845 0.9957150 0.9946466 0.9935794 0.9925133 0.9914484 0.9903846
## Apply all realizations
## Evaluate every draw's survival functions over the shared time grid, then
## reshape to long format: one row per (draw, t, Strata) with a `survival` value.
exp_surv_model_survival <-
    exp_surv_model_surv_func %>%
    mutate(times_df = list(times_df)) %>%
    mutate(times_df = pmap(list(times_df, `S(t|1)`, `S(t|0)`),
                           function(df, s1, s0) {df %>% mutate(s1 = s1(t),
                                                               s0 = s0(t))})) %>%
    select(-`S(t|1)`, -`S(t|0)`) %>%
    unnest(cols = c(times_df)) %>%
    ## `gather()` is superseded; `pivot_longer()` is the tidyr >= 1.0 replacement.
    ## (Row order differs from gather(), but downstream grouping/plotting is
    ## order-insensitive.)
    pivot_longer(cols = c(s1, s0), names_to = "Strata", values_to = "survival") %>%
    mutate(Strata = factor(Strata, # Strata is whether or not projects have high commit frequency
                              levels = c("s1","s0"),
                              labels = c("rev. per day > 1","rev. per day <= 1")))

## Average on survival scale
## Pointwise posterior mean and 95% credible band of the survival curves,
## computed across draws at each (Strata, t).
## `.groups = "drop"` returns an ungrouped tibble and silences the
## "summarise() has grouped output" message seen in the original run.
exp_surv_model_survival_mean <-
    exp_surv_model_survival %>%
    group_by(Strata, t) %>%
    summarize(survival_mean = mean(survival),
              survival_95upper = quantile(survival, probs = 0.975),
              survival_95lower = quantile(survival, probs = 0.025),
              .groups = "drop")
`summarise()` has grouped output by 'Strata'. You can override using the `.groups` argument.
exp_surv_model_survival
# plot the graphs
# Thin translucent lines: one survival curve per posterior draw and stratum
# (grouped by chain x draw x Strata so each realization is its own line).
# Solid lines: pointwise posterior mean; dotted lines: 95% credible band.
(ggplot(data = exp_surv_model_survival, mapping = aes(x = t, y = survival, color = Strata, group = interaction(.chain,.draw,Strata))) 
 + geom_line(size = 0.1, alpha = 0.02) # individual posterior realizations
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_mean, group = Strata)) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95upper, group = Strata), linetype = "dotted") 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95lower, group = Strata), linetype = "dotted")
 + scale_color_hue(direction = -1) # reversed hue order so strata colors match the legend ordering
 + scale_y_continuous(labels = percent_format(), limits=c(0,1))
 +labs(x = "Time (Months)", y = "Survival probability")
 + theme_classic()
 + theme(axis.text.x = element_text(angle = 90, vjust = 0.5), legend.key = element_blank(), legend.position = 'top', plot.title = element_text(hjust = 0.5), strip.background = element_blank()))

LS0tCnRpdGxlOiAiQmF5ZXNpYW4gc3Vydml2YWwgYW5hbHlzaXMgdXNpbmcgY29tbWl0IGZyZXF1ZW5jeSBhcyBhIHByZWRpY3RvciIKb3V0cHV0OiBodG1sX25vdGVib29rCi0tLQpgYGB7cn0KbGlicmFyeShyc3RhbikKbGlicmFyeShzdXJ2aXZhbCkKbGlicmFyeSh0aWR5dmVyc2UpCmxpYnJhcnkodGlkeWJheWVzKQpsaWJyYXJ5KHNjYWxlcykKYGBgCiAKCmBgYHtyfQojIGRhdGEsIHBhcmFtZXRlcnMsIG1vZGVsIGFuZCBnZW5lcmF0ZWQgcXVhbnRpdGllcyBibG9ja3MKU3Rhbl9leHBvbmVudGlhbF9zdXJ2aXZhbF9tb2RlbCA8LSAiCmRhdGF7CiAgaW50IDxsb3dlcj0xPiBOX3VuY2Vuc29yZWQ7CiAgaW50IDxsb3dlcj0xPiBOX2NlbnNvcmVkOwogIGludCA8bG93ZXI9MD4gbnVtQ292YXJpYXRlczsKICBtYXRyaXhbTl9jZW5zb3JlZCwgbnVtQ292YXJpYXRlc10gWF9jZW5zb3JlZDsKICBtYXRyaXhbTl91bmNlbnNvcmVkLCBudW1Db3ZhcmlhdGVzXSBYX3VuY2Vuc29yZWQ7CiAgdmVjdG9yIDxsb3dlcj0wPltOX2NlbnNvcmVkXSB0aW1lc19jZW5zb3JlZDsKICB2ZWN0b3IgPGxvd2VyPTA+W05fdW5jZW5zb3JlZF0gdGltZXNfdW5jZW5zb3JlZDsKfQoKcGFyYW1ldGVyc3sKICB2ZWN0b3JbbnVtQ292YXJpYXRlc10gYmV0YTsgLy9yZWdyZXNzaW9uIGNvZWZmaWNpZW50cwogIHJlYWwgYWxwaGE7IC8vaW50ZXJjZXB0Cn0KCm1vZGVsewogIGJldGEgfiBub3JtYWwoMCwxMCk7IC8vcHJpb3Igb24gcmVncmVzc2lvbiBjb2VmZmljaWVudHMKICBhbHBoYSB+IG5vcm1hbCgwLDEwKTsgLy9wcmlvciBvbiBpbnRlcmNlcHQKICB0YXJnZXQgKz0gZXhwb25lbnRpYWxfbHBkZih0aW1lc191bmNlbnNvcmVkIHwgZXhwKGFscGhhK1hfdW5jZW5zb3JlZCAqIGJldGEpKTsgLy9sb2ctbGlrZWxpaG9vZCBwYXJ0IGZvciB1bmNlbnNvcmVkIHRpbWVzCiAgdGFyZ2V0ICs9IGV4cG9uZW50aWFsX2xjY2RmKHRpbWVzX2NlbnNvcmVkIHwgZXhwKGFscGhhK1hfY2Vuc29yZWQgKiBiZXRhKSk7IC8vbG9nLWxpa2VsaWhvb2QgZm9yIGNlbnNvcmVkIHRpbWVzCn0KCmdlbmVyYXRlZCBxdWFudGl0aWVzewogIHZlY3RvcltOX3VuY2Vuc29yZWRdIHRpbWVzX3VuY2Vuc29yZWRfc2FtcGxlZDsgLy9wcmVkaWN0aW9uIG9mIGRlYXRoCiAgZm9yKGkgaW4gMTpOX3VuY2Vuc29yZWQpIHsKICAgIHRpbWVzX3VuY2Vuc29yZWRfc2FtcGxlZFtpXSA9IGV4cG9uZW50aWFsX3JuZyhleHAoYWxwaGErWF91bmNlbnNvcmVkW2ksXSogYmV0YSkpOwogIH0KfQoiCmBgYAoKYGBge3J9CiMgcHJlcGFyZSB0aGUgZGF0YQpzZXQuc2VlZCg0Mik7IApyZXF1aXJlICh0aWR5dmVyc2UpOwpkYXRhIDwtIHJlYWRfY3N2KCcuLi9kYXRhL25lY2Vzc2FyeV9maWVsZHMuY3N2JykKTiA8LSBucm93IChkYXRhKTsKZGF0YSRoaWdoX3Jldl9mcmVxIDwtIGNhcjo6cmVjb2RlKGRhdGEkaGlnaF9yZXZfZnJlcSwgIidUUlVFJyA9IDA7ICdGQUxTRScgPSAxIikKZGF0YSRjZW5zb3JlZCA8LSBj
YXI6OnJlY29kZShkYXRhJGNlbnNvcmVkLCAiJ1RSVUUnID0gMDsgJ0ZBTFNFJyA9IDEiKQpYIDwtIGFzLm1hdHJpeChwdWxsKGRhdGEsIGhpZ2hfcmV2X2ZyZXEpKTsgCmlzX2NlbnNvcmVkIDwtIHB1bGwoZGF0YSwgY2Vuc29yZWQpPT0wOyAKdGltZXMgPC0gcHVsbChkYXRhLCBkdXJhdGlvbl9tb250aHMpOyAKbXNrX2NlbnNvcmVkIDwtIGlzX2NlbnNvcmVkID09IDE7IApOX2NlbnNvcmVkIDwtIHN1bShtc2tfY2Vuc29yZWQpOwpgYGAKCmBgYHtyfQojIHB1dCBkYXRhIGludG8gYSBsaXN0IGZvciBTdGFuClN0YW5fZGF0YSA8LSBsaXN0IChOX3VuY2Vuc29yZWQgPSBOIC0gTl9jZW5zb3JlZCwgCiAgICAgICAgICAgICAgICAgICAgTl9jZW5zb3JlZCA9IE5fY2Vuc29yZWQsCiAgICAgICAgICAgICAgICAgICAgbnVtQ292YXJpYXRlcyA9IG5jb2woWCksIAogICAgICAgICAgICAgICAgICAgIFhfY2Vuc29yZWQgPSBhcy5tYXRyaXgoWFttc2tfY2Vuc29yZWQsXSksCiAgICAgICAgICAgICAgICAgICAgWF91bmNlbnNvcmVkID0gYXMubWF0cml4KFhbIW1za19jZW5zb3JlZCAsXSksIAogICAgICAgICAgICAgICAgICAgIHRpbWVzX2NlbnNvcmVkID0gdGltZXNbbXNrX2NlbnNvcmVkXSwKICAgICAgICAgICAgICAgICAgICB0aW1lc191bmNlbnNvcmVkID0gdGltZXNbIW1za19jZW5zb3JlZF0pCmBgYAoKYGBge3J9CiMgZml0IFN0YW4gbW9kZWwKcmVxdWlyZShyc3RhbikKZXhwX3N1cnZfbW9kZWxfZml0IDwtIHN1cHByZXNzTWVzc2FnZXMoc3Rhbihtb2RlbF9jb2RlID0gU3Rhbl9leHBvbmVudGlhbF9zdXJ2aXZhbF9tb2RlbCwgZGF0YSA9IFN0YW5fZGF0YSkpCmBgYAoKYGBge3J9CiMgcHJpbnQgbW9kZWwgZml0CnByaW50KGdldF9zZWVkKGV4cF9zdXJ2X21vZGVsX2ZpdCkpCmBgYAoKYGBge3J9CiMgcHJpbnQgZml0IHN1bW1hcnkKZml0X3N1bW1hcnkgPC0gc3VtbWFyeShleHBfc3Vydl9tb2RlbF9maXQpCnByaW50KGZpdF9zdW1tYXJ5JHN1bW1hcnkpCmBgYAoKYGBge3J9CmV4cF9zdXJ2X21vZGVsX2RyYXdzIDwtIHRpZHliYXllczo6dGlkeV9kcmF3cyhleHBfc3Vydl9tb2RlbF9maXQpCmV4cF9zdXJ2X21vZGVsX2RyYXdzCmBgYAogCmBgYHtyfQojIyBDb25zdHJ1Y3RvciBmb3IgU3RyYXRhLXNwZWNpZmljIHN1cnZpdmFsIGZ1bmN0aW9uCmNvbnN0cnVjdF9zdXJ2aXZhbF9mdW5jdGlvbiA8LSBmdW5jdGlvbihhbHBoYSwgYmV0YSwgeCkgewogICAgZnVuY3Rpb24odCkgewogICAgICAgIGxhbWJkYSA8LSBleHAoYWxwaGEgKyB4KmJldGEpCiAgICAgICAgZXhwKC0obGFtYmRhICogdCkpCiAgICB9Cn0KCiMjIFJhbmRvbSBmdW5jdGlvbnMKZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jIDwtCiAgICBleHBfc3Vydl9tb2RlbF9kcmF3cyAlPiUKICAgIHNlbGVjdCguY2hhaW4sIC5pdGVyYXRpb24sIC5kcmF3LCBhbHBoYSwgYGJldGFbMV1gKSAlPiUKICAgICMjIFNpbXBsaWZ5IG5hbWUKICAgIHJlbmFtZShiZXRhID0gYGJldGFbMV1g
KSAlPiUKICAgICMjIENvbnN0cnVjdCByZWFsaXphdGlvbiBvZiByYW5kb20gZnVuY3Rpb25zCiAgICBtdXRhdGUoYFModHwxKWAgPSBwbWFwKGxpc3QoYWxwaGEsIGJldGEpLCBmdW5jdGlvbihhLGIpIHtjb25zdHJ1Y3Rfc3Vydml2YWxfZnVuY3Rpb24oYSxiLDEpfSksCiAgICAgICAgICAgYFModHwwKWAgPSBwbWFwKGxpc3QoYWxwaGEsIGJldGEpLCBmdW5jdGlvbihhLGIpIHtjb25zdHJ1Y3Rfc3Vydml2YWxfZnVuY3Rpb24oYSxiLDApfSkpCmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYwpgYGAKCmBgYHtyfQp0aW1lcyA8LSBzZXEoZnJvbSA9IDAsIHRvID0gMTY1LCBieSA9IDAuMSkKdGltZXNfZGYgPC0gZGF0YV9mcmFtZSh0ID0gdGltZXMpCgojIyBUcnkgZmlyc3QgcmVhbGl6YXRpb25zCmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYyRgUyh0fDEpYFtbMV1dKHRpbWVzWzE6MTBdKQpgYGAKCmBgYHtyfQpleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMkYFModHwwKWBbWzFdXSh0aW1lc1sxOjEwXSkKYGBgCmBgYHtyfQojIyBBcHBseSBhbGwgcmVhbGl6YXRpb25zCmV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsIDwtCiAgICBleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMgJT4lCiAgICBtdXRhdGUodGltZXNfZGYgPSBsaXN0KHRpbWVzX2RmKSkgJT4lCiAgICBtdXRhdGUodGltZXNfZGYgPSBwbWFwKGxpc3QodGltZXNfZGYsIGBTKHR8MSlgLCBgUyh0fDApYCksCiAgICAgICAgICAgICAgICAgICAgICAgICAgIGZ1bmN0aW9uKGRmLCBzMSwgczApIHtkZiAlPiUgbXV0YXRlKHMxID0gczEodCksCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIHMwID0gczAodCkpfSkpICU+JQogICAgc2VsZWN0KC1gUyh0fDEpYCwgLWBTKHR8MClgKSAlPiUKICAgIHVubmVzdChjb2xzID0gYyh0aW1lc19kZikpICU+JQogICAgZ2F0aGVyKGtleSA9IFN0cmF0YSwgdmFsdWUgPSBzdXJ2aXZhbCwgczEsIHMwKSAlPiUKICAgIG11dGF0ZShTdHJhdGEgPSBmYWN0b3IoU3RyYXRhLCAjIFN0cmF0YSBpcyB3aGV0aGVyIG9yIG5vdCBwcm9qZWN0cyBoYXZlIGhpZ2ggY29tbWl0IGZyZXF1ZW5jeQogICAgICAgICAgICAgICAgICAgICAgICAgICAgICBsZXZlbHMgPSBjKCJzMSIsInMwIiksCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIGxhYmVscyA9IGMoInJldi4gcGVyIGRheSA+IDEiLCJyZXYuIHBlciBkYXkgPD0gMSIpKSkKCiMjIEF2ZXJhZ2Ugb24gc3Vydml2YWwgc2NhbGUKZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiA8LQogICAgZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwgJT4lCiAgICBncm91cF9ieShTdHJhdGEsIHQpICU+JQogICAgc3VtbWFyaXplKHN1cnZpdmFsX21lYW4gPSBtZWFuKHN1cnZpdmFsKSwKICAgICAgICAgICAgICBzdXJ2aXZhbF85NXVwcGVyID0gcXVhbnRpbGUoc3Vydml2YWwsIHByb2JzID0gMC45NzUpLAogICAgICAgICAgICAgIHN1cnZpdmFsXzk1bG93ZXIgPSBx
dWFudGlsZShzdXJ2aXZhbCwgcHJvYnMgPSAwLjAyNSkpCgpleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbApgYGAKCmBgYHtyfQojIHBsb3QgdGhlIGdyYXBocwooZ2dwbG90KGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbCwgbWFwcGluZyA9IGFlcyh4ID0gdCwgeSA9IHN1cnZpdmFsLCBjb2xvciA9IFN0cmF0YSwgZ3JvdXAgPSBpbnRlcmFjdGlvbiguY2hhaW4sLmRyYXcsU3RyYXRhKSkpIAogKyBnZW9tX2xpbmUoc2l6ZSA9IDAuMSwgYWxwaGEgPSAwLjAyKSAKICsgZ2VvbV9saW5lKGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbF9tZWFuLCBtYXBwaW5nID0gYWVzKHkgPSBzdXJ2aXZhbF9tZWFuLCBncm91cCA9IFN0cmF0YSkpIAogKyBnZW9tX2xpbmUoZGF0YSA9IGV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsX21lYW4sIG1hcHBpbmcgPSBhZXMoeSA9IHN1cnZpdmFsXzk1dXBwZXIsIGdyb3VwID0gU3RyYXRhKSwgbGluZXR5cGUgPSAiZG90dGVkIikgCiArIGdlb21fbGluZShkYXRhID0gZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiwgbWFwcGluZyA9IGFlcyh5ID0gc3Vydml2YWxfOTVsb3dlciwgZ3JvdXAgPSBTdHJhdGEpLCBsaW5ldHlwZSA9ICJkb3R0ZWQiKQogKyBzY2FsZV9jb2xvcl9odWUoZGlyZWN0aW9uID0gLTEpCiArIHNjYWxlX3lfY29udGludW91cyhsYWJlbHMgPSBwZXJjZW50X2Zvcm1hdCgpLCBsaW1pdHM9YygwLDEpKQogK2xhYnMoeCA9ICJUaW1lIChNb250aHMpIiwgeSA9ICJTdXJ2aXZhbCBwcm9iYWJpbGl0eSIpCiArIHRoZW1lX2J3KCkKICsgdGhlbWUoYXhpcy50ZXh0LnggPSBlbGVtZW50X3RleHQoYW5nbGUgPSA5MCwgdmp1c3QgPSAwLjUpLCBsZWdlbmQua2V5ID0gZWxlbWVudF9ibGFuaygpLCBsZWdlbmQucG9zaXRpb24gPSAndG9wJywgcGxvdC50aXRsZSA9IGVsZW1lbnRfdGV4dChoanVzdCA9IDAuNSksIHN0cmlwLmJhY2tncm91bmQgPSBlbGVtZW50X2JsYW5rKCkpKQpgYGAK